In [1]:
# Standard-library, scientific, and deep-learning imports for the training notebook.
import json
import copy
import time
import random
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from matplotlib import pyplot as plt  # NOTE(review): redundant — binds the same module to the same alias as the line above
from torchsummary import summary  # NOTE(review): `summary` is never used in the visible cells
In [2]:
from nmfd_gnn import NMFD_GNN

1: set parameters

In [3]:
# Report CUDA availability, pin computation to the first GPU, and seed all RNGs.
print (torch.cuda.is_available())
device = torch.device("cuda:0")  # NOTE(review): hard-coded; fails on a CPU-only machine
random_seed = 42
# Seed every random source the notebook uses so runs are reproducible.
random.seed(random_seed)
torch.manual_seed(random_seed)
torch.cuda.manual_seed(random_seed)
r = random.random  # handed to random.shuffle() later so shuffling follows the seeded stream
True
In [4]:
#1.1: settings
M = 10                       #number of time interval in a window
missing_ratio = 0.50
# Experiment tag, e.g. "m_10_missing_50"; also used as the output directory name.
file_name = "m_" + str(M) + "_missing_" + str(int(missing_ratio*100))
print (file_name)

#1.2: hyperparameters
num_epochs, batch_size, learning_rate = 200, 16, 0.001
# Loss weights: flow data term, occupancy data term, physics (hinge) term.
beta_flow, beta_occ, beta_phy = 1.0, 1.0, 0.1
batch_size_vt = 16  #batch size for evaluation and test
delta_ratio = 0.1   #the ratio of delta in the standard deviation of flow

hyper = {"n_e": num_epochs, "b_s": batch_size, "b_s_vt": batch_size_vt, "l_r": learning_rate,\
         "beta_f": beta_flow, "beta_o": beta_occ, "beta_p": beta_phy, "delta_ratio": delta_ratio}

# Model architecture: GNN layer widths, LSTM width, projection dim, and NMFD priors.
gnn_dim_1, gnn_dim_2, gnn_dim_3, lstm_dim = 2, 128, 128, 128
p_dim = 10    #column dimension of L1, L2
c_k = 5.5     #meter, the sum of loop width and uniform vehicle length. based on Gero and Daganzo 2008.
theta_ini = [-2.879, 5.207, -2.473, 1.722, 3.619]

hyper_model = {"g_dim_1": gnn_dim_1, "g_dim_2": gnn_dim_2, "g_dim_3": gnn_dim_3, "l_dim": lstm_dim,\
               "p_dim": p_dim, "c_k": c_k, "theta_ini": theta_ini}
max_no_decrease = 30

#1.3: set paths
# NOTE(review): absolute path to one user's home directory; not portable across machines.
root_path = "/home/umni2/a/umnilab/users/xue120/umni4/2023_mfd_traffic_london/"
file_path = root_path + "2_prepare_data/" + file_name + "/"
train_path, vali_path, test_path =\
    file_path + "train.json", file_path + "vali.json", file_path + "test.json"
sensor_id_path = file_path + "sensor_id_order.json"
sensor_adj_path = file_path + "sensor_adj.json"
mean_std_path = file_path + "mean_std.json"
m_10_missing_50

2: visualization

In [5]:
def visualize_train_loss(total_phy_flow_occ_loss):
    """Plot per-epoch physics/flow/occupancy train losses; save to <file_name>/train_loss.png."""
    plt.figure(figsize=(4,3), dpi=75)
    history = np.array(total_phy_flow_occ_loss)
    # Column layout per epoch: [total loss, physics loss, flow loss, occupancy loss].
    epochs = range(len(history))
    for column, label in ((1, "phy loss"), (2, "flow loss"), (3, "occ loss")):
        plt.plot(epochs, history[:, column], linewidth=1, label=label)
    plt.legend()
    plt.title('Loss decline on train')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.savefig(file_name + '/' + 'train_loss.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_flow_loss(vali_f_mae, test_f_mae):
    """Plot validation/test flow MAE curves; save to <file_name>/flow_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_f_mae))
    for series, label in ((vali_f_mae, "Validate"), (test_f_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=label)
    plt.legend()
    plt.title('MAE of flow on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE (veh/h)')
    plt.savefig(file_name + '/' + 'flow_mae.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_occ_loss(vali_o_mae, test_o_mae):
    """Plot validation/test occupancy MAE curves; save to <file_name>/occ_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_o_mae))
    for series, label in ((vali_o_mae, "Validate"), (test_o_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=label)
    plt.legend()
    plt.title('MAE of occupancy on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE')
    plt.savefig(file_name + '/' + 'occ_mae.png',bbox_inches = 'tight')
    plt.show()

3: compute the error

In [6]:
def MAELoss(yhat, y):
    """Mean absolute error between two tensors, returned as a Python float.

    Fix: the original computed torch.div(torch.abs(yhat - y), 1) — dividing
    by 1 is a no-op, so the division is dropped.
    """
    return float(torch.mean(torch.abs(yhat - y)))

def RMSELoss(yhat, y):
    """Root-mean-squared error between two tensors, returned as a Python float."""
    squared_error = (yhat - y) ** 2
    return float(torch.mean(squared_error).sqrt())

def vali_test(model, f, f_mask, o, o_mask, f_o_mean_std, b_s_vt):    
    """Evaluate `model` batch-wise and return de-normalized (f_mae, f_rmse, o_mae, o_rmse)."""
    flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
    n = len(f)
    f_mae_list, f_rmse_list = list(), list()
    o_mae_list, o_rmse_list = list(), list()
    num_list = list()
    for start in range(0, n, b_s_vt):
        end = np.min([start + b_s_vt, n])
        num_list.append(end - start)
        bf, bo = f[start: end], o[start: end]
        # Model takes the masked inputs; the last two outputs (q terms) are unused here.
        bf_hat, bo_hat, _, _ = model.run(f_mask[start: end], o_mask[start: end])
        bf_hat, bo_hat = bf_hat.cpu(), bo_hat.cpu()
        # Scale errors back to physical units with the normalization std.
        f_mae_list.append(MAELoss(bf_hat, bf) * flow_std)
        f_rmse_list.append(RMSELoss(bf_hat, bf) * flow_std)
        o_mae_list.append(MAELoss(bo_hat, bo) * occ_std)
        o_rmse_list.append(RMSELoss(bo_hat, bo) * occ_std)
    # Count-weighted averages over batches; RMSE is recombined in the squared domain.
    f_mae, o_mae = np.dot(f_mae_list, num_list)/n, np.dot(o_mae_list, num_list)/n
    f_rmse = np.sqrt(np.dot(np.multiply(f_rmse_list, f_rmse_list), num_list)/n)
    o_rmse = np.sqrt(np.dot(np.multiply(o_rmse_list, o_rmse_list), num_list)/n)
    return f_mae, f_rmse, o_mae, o_rmse

def evaluate(model, vt_f, vt_o, vt_f_m, vt_o_m, f_o_mean_std, b_s_vt): #vt: vali_test
    """Thin wrapper: reorder the mask arguments into vali_test's signature and forward its metrics."""
    return vali_test(model, vt_f, vt_f_m, vt_o, vt_o_m, f_o_mean_std, b_s_vt)

4: train

In [7]:
import torch  # NOTE(review): redundant — torch is already imported in the first cell
In [8]:
#4.1: one training epoch
def train_epoch(model, opt, criterion, train_f_x, train_f_y, train_o_x, train_o_y, hyper, flow_std_squ, delta): 
    """Run one optimization epoch over the (pre-shuffled) training tensors.

    f: flow; o: occupancy. `train_*_x` are masked model inputs (on `device`),
    `train_*_y` the ground-truth targets (on CPU).

    Returns (aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss) — the
    per-batch means of the combined / physics / flow / occupancy losses.

    Fixes vs. original: the running averages were recomputed inside the loop on
    every iteration, and the function crashed with an undefined `aver_loss`
    when n <= b_s (no batches); averaging now happens once, after the loop.
    """
    model.train()
    losses, p_losses, f_losses, o_losses = list(), list(), list(), list()
    
    beta_f, beta_o, beta_p, b_s = hyper["beta_f"], hyper["beta_o"], hyper["beta_p"], hyper["b_s"]
    n = len(train_f_x)
    print ("# batch: ", int(n/b_s))   
    
    # NOTE: range stops before n-b_s, so the trailing partial batch is dropped.
    for i in range(0, n-b_s, b_s):
        time1 = time.time()
        x_f_batch, y_f_batch = train_f_x[i: i+b_s], train_f_y[i: i+b_s]   
        x_o_batch, y_o_batch = train_o_x[i: i+b_s], train_o_y[i: i+b_s]

        opt.zero_grad() 
        y_f_hat, y_o_hat, q_hat, q_theta = model.run(x_f_batch, x_o_batch)
        
        # Physics term: hinge loss penalizing |q_hat - q_theta| only beyond the
        # tolerance `delta`, normalized by the squared flow std so it is
        # comparable in scale to the normalized data losses.
        q_gap = q_hat - q_theta       
        delta_gap = torch.ones(q_gap.shape, device=device)*delta
        zero_gap = torch.zeros(q_gap.shape, device=device)            #(n, m)
        hl_loss = torch.max(q_gap-delta_gap, zero_gap) + torch.max(-delta_gap-q_gap, zero_gap) 
        hl_loss = hl_loss/flow_std_squ
        p_loss = criterion(hl_loss, zero_gap).cpu()            #(n, m)
        f_loss = criterion(y_f_hat.cpu(), y_f_batch)              #data loss of flow
        o_loss = criterion(y_o_hat.cpu(), y_o_batch)              #data loss of occupancy
        
        loss = beta_f*f_loss + beta_o*o_loss + beta_p*p_loss
        
        loss.backward()
        opt.step()
        losses.append(loss.data.numpy())
        p_losses.append(p_loss.data.numpy())
        f_losses.append(f_loss.data.numpy())
        o_losses.append(o_loss.data.numpy())
        
        # Progress report every 64 batches.
        if i % (64*b_s) == 0:
            print ("i_batch: ", i/b_s)
            print ("the loss for this batch: ", loss.data.numpy())
            print ("flow loss", f_loss.data.numpy())
            print ("occ loss", o_loss.data.numpy())
            time2 = time.time()
            print ("time for this batch", time2-time1)
            print ("----------------------------------")

    # Average once after the loop; guard against the zero-batch case.
    n_loss = float(max(len(losses), 1))
    aver_loss = sum(losses)/n_loss
    aver_p_loss = sum(p_losses)/n_loss
    aver_f_loss = sum(f_losses)/n_loss
    aver_o_loss = sum(o_losses)/n_loss
    return aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss

#4.2: all train epochs
def train_process(model, criterion, train, vali, test, hyper, f_o_mean_std):
    """Full training loop: shuffle + train each epoch, evaluate on vali/test,
    plot and persist metrics, and early-stop after `max_no_decrease` epochs
    without improvement of the validation normalized-MSE sum.

    Returns (total_phy_flow_occ_loss, model). Relies on globals: `device`,
    `file_name`, `max_no_decrease`, `r`, and the visualize_* helpers.
    """
    total_phy_flow_occ_loss = list()
    
    n_mse_flow_occ = 0 #mse(flow) + mse(occ) for validation sets.
    f_std = f_o_mean_std[1]
    
    # Targets stay on CPU; masked inputs move to the GPU once, up front.
    vali_f, vali_o = vali["flow"], vali["occupancy"] 
    vali_f_m, vali_o_m = vali["flow_mask"].to(device), vali["occupancy_mask"].to(device) 
    test_f, test_o = test["flow"], test["occupancy"] 
    test_f_m, test_o_m = test["flow_mask"].to(device), test["occupancy_mask"].to(device) 
    
    l_r, n_e = hyper["l_r"], hyper["n_e"]
    opt = optim.Adam(model.parameters(), l_r, betas = (0.9,0.999), weight_decay=0.0001)
    # Learning rate drops once, at epoch 150 (default gamma=0.1).
    opt_scheduler = torch.optim.lr_scheduler.MultiStepLR(opt, milestones=[150])
    
    print ("# epochs ", n_e)
    r_vali_f_mae, r_vali_o_mae, r_test_f_mae, r_test_o_mae = list(), list(), list(), list()
    r_vali_f_rmse, r_vali_o_rmse, r_test_f_rmse, r_test_o_rmse = list(), list(), list(), list()
    
    flow_std_squ = np.power(f_std, 2)
    
    no_decrease = 0
    for i in range(n_e):
        print ("----------------an epoch starts-------------------")
        #time1_s = time.time()
        
        time_s = time.time()
        print ("i_epoch: ", i)
        n_train = len(train["flow"])
        number_list = copy.copy(list(range(n_train)))
        # NOTE(review): the `random=` parameter of random.shuffle was removed in
        # Python 3.11 — this line breaks on newer interpreters; TODO confirm the
        # target Python version or drop the argument.
        random.shuffle(number_list, random = r)
        shuffle_idx = torch.tensor(number_list)
        # Apply the same permutation to inputs and targets of both channels.
        train_x_f, train_y_f = train["flow_mask"][shuffle_idx], train["flow"][shuffle_idx]
        train_x_o, train_y_o = train["occupancy_mask"][shuffle_idx], train["occupancy"][shuffle_idx] 
        
        delta = hyper["delta_ratio"] * f_std
        aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss =\
            train_epoch(model, opt, criterion, train_x_f.to(device), train_y_f,\
                        train_x_o.to(device), train_y_o, hyper, flow_std_squ, delta)
        opt_scheduler.step()
        
        total_phy_flow_occ_loss.append([aver_loss, aver_p_loss, aver_f_loss, aver_o_loss])
        print ("train loss for this epoch: ", round(aver_loss, 6))
        
        #evaluate
        b_s_vt = hyper["b_s_vt"]
        vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
            evaluate(model, vali_f, vali_o, vali_f_m, vali_o_m, f_o_mean_std, b_s_vt)
        test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
            evaluate(model, test_f, test_o, test_f_m, test_o_m, f_o_mean_std, b_s_vt)  
        
        r_vali_f_mae.append(vali_f_mae)
        r_test_f_mae.append(test_f_mae)
        r_vali_o_mae.append(vali_o_mae)
        r_test_o_mae.append(test_o_mae)
        r_vali_f_rmse.append(vali_f_rmse)
        r_test_f_rmse.append(test_f_rmse)
        r_vali_o_rmse.append(vali_o_rmse)
        r_test_o_rmse.append(test_o_rmse)
        
        visualize_train_loss(total_phy_flow_occ_loss)
        visualize_flow_loss(r_vali_f_mae, r_test_f_mae)
        visualize_occ_loss(r_vali_o_mae, r_test_o_mae)
        time_e = time.time()
        print ("time for this epoch", time_e - time_s)
        
        # Persist metric history every epoch so a crash loses nothing.
        performance = {"train": total_phy_flow_occ_loss,\
                  "vali": [r_vali_f_mae, r_vali_f_rmse, r_vali_o_mae, r_vali_o_rmse],\
                  "test": [r_test_f_mae, r_test_f_rmse, r_test_o_mae, r_test_o_rmse]}
        subfile =  open(file_name + '/' + 'performance'+'.json','w')
        json.dump(performance, subfile)
        subfile.close()
        
        #early stop
        # Normalize both RMSEs so flow and occupancy contribute on equal scale.
        flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
        norm_f_rmse, norm_o_rmse = vali_f_rmse/flow_std, vali_o_rmse/occ_std
        norm_sum_mse = norm_f_rmse*norm_f_rmse + norm_o_rmse*norm_o_rmse
        
        # min_until_now includes the current epoch, so no_decrease only grows
        # when this epoch is strictly worse than the best seen so far.
        if n_mse_flow_occ > 0:
            min_until_now = min([min_until_now, norm_sum_mse])
        else:
            min_until_now = 1000000.0  
        if norm_sum_mse > min_until_now:
            no_decrease = no_decrease+1
        else:
            no_decrease = 0
        if no_decrease == max_no_decrease:
            print ("Early stop at the " + str(i+1) + "-th epoch")
            return total_phy_flow_occ_loss, model 
        n_mse_flow_occ = n_mse_flow_occ + 1
        
        print ("No_decrease: ", no_decrease)
    return total_phy_flow_occ_loss, model    

5: prepare tensors

In [9]:
def tensorize(train_vali_test):
    """Convert the four flow/occupancy lists of a data split to torch tensors."""
    keys = ("flow", "flow_mask", "occupancy", "occupancy_mask")
    return {key: torch.tensor(train_vali_test[key]) for key in keys}

def normalize_flow_occ(tvt, f_o_mean_std):  #tvt: train, vali, test
    """Z-normalize flow and occupancy (and their masked variants) in `tvt`.

    f_o_mean_std = [flow_mean, flow_std, occ_mean, occ_std]. Mutates and
    returns the same dict, with values converted back to nested lists.
    """
    def standardize(values, mean, std):
        # Round-trip through numpy for vectorized (x - mean) / std.
        return ((np.array(values) - mean) / std).tolist()

    f_mean, f_std = f_o_mean_std[0], f_o_mean_std[1]
    o_mean, o_std = f_o_mean_std[2], f_o_mean_std[3]

    tvt["flow_mask"] = standardize(tvt["flow_mask"], f_mean, f_std)
    tvt["flow"] = standardize(tvt["flow"], f_mean, f_std)
    tvt["occupancy_mask"] = standardize(tvt["occupancy_mask"], o_mean, o_std)
    tvt["occupancy"] = standardize(tvt["occupancy"], o_mean, o_std)
    return tvt

def transform_distance(d_matrix, scale=10000.0):
    """Map a distance matrix to Gaussian-kernel weights exp(-scale * d^2 / sigma^2).

    sigma is the standard deviation over all entries of `d_matrix`. `scale`
    defaults to the previously hard-coded factor 10000.0, so existing callers
    are unaffected.

    Fixes vs. original: vectorized instead of a nested Python loop, no longer
    mutates the input list, and guards the sigma == 0 case (all distances
    equal) that previously raised ZeroDivisionError.
    """
    d = np.asarray(d_matrix, dtype=float)
    sigma_square = float(np.std(d)) ** 2
    if sigma_square == 0.0:
        # Degenerate matrix: give every pair full weight instead of crashing.
        return np.ones_like(d).tolist()
    return np.exp(-scale * d * d / sigma_square).tolist()

def load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path):
    """Load, normalize, and tensorize the train/vali/test splits and the sensor graph.

    Returns (train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length),
    where `adj` is a float tensor on `device` and sensor_length[i] is the
    length entry for the sensor at ordered index i.

    Fix vs. original: json.load(open(path)) leaked six file handles; each file
    is now opened with a context manager.
    """
    def _read_json(path):
        # Context manager guarantees the handle is closed even on parse errors.
        with open(path) as fp:
            return json.load(fp)

    mean_std = _read_json(mean_std_path)
    f_mean, f_std, o_mean, o_std =\
        mean_std["f_mean"], mean_std["f_std"], mean_std["o_mean"], mean_std["o_std"]
    f_o_mean_std = [f_mean, f_std, o_mean, o_std]
    
    train = _read_json(train_path)
    vali = _read_json(vali_path)
    test = _read_json(test_path)
    adj = _read_json(sensor_adj_path)["adj"]
    n_sensor = len(train["flow"][0])    
    
    train = tensorize(normalize_flow_occ(train, f_o_mean_std))
    vali = tensorize(normalize_flow_occ(vali, f_o_mean_std))
    test = tensorize(normalize_flow_occ(test, f_o_mean_std))

    # Gaussian-kernel adjacency, moved to the GPU once here.
    adj = torch.tensor(transform_distance(adj), device=device).float()   
    
    df_sensor_id = _read_json(sensor_id_path)
    # df_sensor_id: sensor id -> [ordered index, ..., ..., length] — TODO confirm schema
    sensor_length = [0.0 for i in range(n_sensor)]
    for sensor in df_sensor_id:
        sensor_length[df_sensor_id[sensor][0]] = df_sensor_id[sensor][3]
        
    return train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length

6: main

In [10]:
#6.1 load the data
# Time the full load/normalize/tensorize pipeline (printed output: ~8 s here).
time1 = time.time()
train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length =\
    load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path)
time2 = time.time()
print (time2-time1)
8.184683561325073
In [11]:
# Sanity-check: split sizes and the normalization statistics [f_mean, f_std, o_mean, o_std].
print (len(train["flow"]))
print (len(vali["flow"]))
print (len(test["flow"]))
print (f_o_mean_std)
1546
509
510
[426.01546608926594, 254.82043085525967, 0.1815290985289925, 0.18313943695883658]
In [12]:
# Instantiate the NMFD-GNN model on the GPU and the MSE criterion used for all loss terms.
model = NMFD_GNN(n_sensor, M, hyper_model, f_o_mean_std, sensor_length, adj).to(device)   
cri = nn.MSELoss() 
In [13]:
#6.2: train the model
# Runs up to num_epochs epochs with early stopping; metrics/figures are saved under file_name/.
total_phy_flow_occ_loss, trained_model = train_process(model, cri, train, vali, test, hyper, f_o_mean_std)
# epochs  200
----------------an epoch starts-------------------
i_epoch:  0
# batch:  96
i_batch:  0.0
the loss for this batch:  1.4848627
flow loss 0.96840364
occ loss 0.5164554
time for this batch 0.5767495632171631
----------------------------------
i_batch:  64.0
the loss for this batch:  0.5330178
flow loss 0.29276595
occ loss 0.24024825
time for this batch 0.2957487106323242
----------------------------------
train loss for this epoch:  0.67295
time for this epoch 35.35088920593262
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  1
# batch:  96
i_batch:  0.0
the loss for this batch:  0.504915
flow loss 0.2062798
occ loss 0.29863065
time for this batch 0.2524745464324951
----------------------------------
i_batch:  64.0
the loss for this batch:  0.46207827
flow loss 0.2263971
occ loss 0.23567651
time for this batch 0.2880067825317383
----------------------------------
train loss for this epoch:  0.374251
time for this epoch 36.26380181312561
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  2
# batch:  96
i_batch:  0.0
the loss for this batch:  0.38819546
flow loss 0.18900731
occ loss 0.19918442
time for this batch 0.25884556770324707
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3044828
flow loss 0.14840803
occ loss 0.15607126
time for this batch 0.3065369129180908
----------------------------------
train loss for this epoch:  0.328263
time for this epoch 35.72489857673645
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  3
# batch:  96
i_batch:  0.0
the loss for this batch:  0.38382387
flow loss 0.1616818
occ loss 0.2221377
time for this batch 0.2742955684661865
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24658458
flow loss 0.12667866
occ loss 0.11990277
time for this batch 0.2919182777404785
----------------------------------
train loss for this epoch:  0.304061
time for this epoch 36.43282461166382
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  4
# batch:  96
i_batch:  0.0
the loss for this batch:  0.30753264
flow loss 0.13512178
occ loss 0.17240673
time for this batch 0.2702476978302002
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29918933
flow loss 0.14269549
occ loss 0.15648997
time for this batch 0.3111114501953125
----------------------------------
train loss for this epoch:  0.287548
time for this epoch 36.76224446296692
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  5
# batch:  96
i_batch:  0.0
the loss for this batch:  0.29134503
flow loss 0.15168162
occ loss 0.13966066
time for this batch 0.27826666831970215
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26444674
flow loss 0.12989955
occ loss 0.13454352
time for this batch 0.2848038673400879
----------------------------------
train loss for this epoch:  0.267849
time for this epoch 36.37389659881592
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  6
# batch:  96
i_batch:  0.0
the loss for this batch:  0.24478365
flow loss 0.10612664
occ loss 0.13865367
time for this batch 0.27057385444641113
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26467466
flow loss 0.11512673
occ loss 0.14954412
time for this batch 0.30602574348449707
----------------------------------
train loss for this epoch:  0.254552
time for this epoch 36.506502866744995
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  7
# batch:  96
i_batch:  0.0
the loss for this batch:  0.22008352
flow loss 0.11125137
occ loss 0.10882893
time for this batch 0.26857757568359375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2270084
flow loss 0.10302466
occ loss 0.12397967
time for this batch 0.307401180267334
----------------------------------
train loss for this epoch:  0.248707
time for this epoch 36.476739168167114
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  8
# batch:  96
i_batch:  0.0
the loss for this batch:  0.24095438
flow loss 0.111260995
occ loss 0.12969047
time for this batch 0.26579713821411133
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2455107
flow loss 0.101134025
occ loss 0.14437304
time for this batch 0.3143503665924072
----------------------------------
train loss for this epoch:  0.239479
time for this epoch 36.11579632759094
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  9
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21575913
flow loss 0.09722692
occ loss 0.11852911
time for this batch 0.27146196365356445
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22005168
flow loss 0.10042532
occ loss 0.11962275
time for this batch 0.3151531219482422
----------------------------------
train loss for this epoch:  0.234619
time for this epoch 37.074735164642334
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  10
# batch:  96
i_batch:  0.0
the loss for this batch:  0.27981463
flow loss 0.10427543
occ loss 0.175535
time for this batch 0.2644953727722168
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2359231
flow loss 0.10845666
occ loss 0.12746277
time for this batch 0.30802297592163086
----------------------------------
train loss for this epoch:  0.230569
time for this epoch 36.481016874313354
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  11
# batch:  96
i_batch:  0.0
the loss for this batch:  0.25549978
flow loss 0.090290025
occ loss 0.1652063
time for this batch 0.2687368392944336
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21342996
flow loss 0.100876875
occ loss 0.11254953
time for this batch 0.2679259777069092
----------------------------------
train loss for this epoch:  0.229597
time for this epoch 36.01028394699097
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  12
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21924475
flow loss 0.08504423
occ loss 0.13419698
time for this batch 0.2635970115661621
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18196929
flow loss 0.08549277
occ loss 0.096472986
time for this batch 0.30960702896118164
----------------------------------
train loss for this epoch:  0.222377
time for this epoch 36.02855658531189
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  13
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1860116
flow loss 0.08458998
occ loss 0.101418346
time for this batch 0.261707067489624
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24673149
flow loss 0.09567285
occ loss 0.15105446
time for this batch 0.29612064361572266
----------------------------------
train loss for this epoch:  0.219995
time for this epoch 37.14602303504944
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  14
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1947659
flow loss 0.08510791
occ loss 0.109654725
time for this batch 0.22641491889953613
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22287405
flow loss 0.08489458
occ loss 0.13797498
time for this batch 0.3033750057220459
----------------------------------
train loss for this epoch:  0.217076
time for this epoch 36.83862829208374
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  15
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17047372
flow loss 0.07279774
occ loss 0.09767276
time for this batch 0.2696096897125244
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22185308
flow loss 0.08148939
occ loss 0.14036043
time for this batch 0.29004430770874023
----------------------------------
train loss for this epoch:  0.215989
time for this epoch 36.49088501930237
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  16
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21889906
flow loss 0.0786569
occ loss 0.14023878
time for this batch 0.27796173095703125
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20558225
flow loss 0.08659989
occ loss 0.118978955
time for this batch 0.3019697666168213
----------------------------------
train loss for this epoch:  0.213245
time for this epoch 36.54662370681763
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  17
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2195569
flow loss 0.08192747
occ loss 0.13762589
time for this batch 0.260636568069458
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18881476
flow loss 0.077533394
occ loss 0.11127805
time for this batch 0.30052924156188965
----------------------------------
train loss for this epoch:  0.211795
time for this epoch 36.75710964202881
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  18
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1934085
flow loss 0.08154051
occ loss 0.11186494
time for this batch 0.2787659168243408
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18356383
flow loss 0.08387903
occ loss 0.09968159
time for this batch 0.30490875244140625
----------------------------------
train loss for this epoch:  0.209775
time for this epoch 36.953593015670776
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  19
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2401194
flow loss 0.09295917
occ loss 0.14715616
time for this batch 0.28457212448120117
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23262028
flow loss 0.09302713
occ loss 0.1395889
time for this batch 0.28689122200012207
----------------------------------
train loss for this epoch:  0.210994
time for this epoch 36.93781065940857
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  20
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18188073
flow loss 0.07853105
occ loss 0.10334596
time for this batch 0.2651689052581787
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16225863
flow loss 0.0779114
occ loss 0.084344156
time for this batch 0.26715898513793945
----------------------------------
train loss for this epoch:  0.207553
time for this epoch 36.71192169189453
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  21
# batch:  96
i_batch:  0.0
the loss for this batch:  0.22268672
flow loss 0.07882426
occ loss 0.14385857
time for this batch 0.2740635871887207
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19573662
flow loss 0.08823739
occ loss 0.107495286
time for this batch 0.2738208770751953
----------------------------------
train loss for this epoch:  0.205157
time for this epoch 36.22328162193298
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  22
# batch:  96
i_batch:  0.0
the loss for this batch:  0.22048828
flow loss 0.08721264
occ loss 0.1332718
time for this batch 0.26253271102905273
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22229864
flow loss 0.08217304
occ loss 0.14012223
time for this batch 0.28227829933166504
----------------------------------
train loss for this epoch:  0.2059
time for this epoch 35.55528903007507
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  23
# batch:  96
i_batch:  0.0
the loss for this batch:  0.22821781
flow loss 0.08434923
occ loss 0.14386481
time for this batch 0.2720198631286621
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24997693
flow loss 0.09495654
occ loss 0.15501618
time for this batch 0.29602837562561035
----------------------------------
train loss for this epoch:  0.20324
time for this epoch 37.21440505981445
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  24
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1917189
flow loss 0.07253474
occ loss 0.11918025
time for this batch 0.2591521739959717
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22302781
flow loss 0.08475726
occ loss 0.13826638
time for this batch 0.3020796775817871
----------------------------------
train loss for this epoch:  0.20225
time for this epoch 36.53205847740173
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  25
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20427763
flow loss 0.08246567
occ loss 0.12180783
time for this batch 0.2750389575958252
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20641851
flow loss 0.081985965
occ loss 0.124428675
time for this batch 0.30574846267700195
----------------------------------
train loss for this epoch:  0.200115
time for this epoch 36.2480411529541
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  26
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18382381
flow loss 0.07596087
occ loss 0.107859656
time for this batch 0.26570773124694824
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25091738
flow loss 0.087745525
occ loss 0.16316763
time for this batch 0.3085489273071289
----------------------------------
train loss for this epoch:  0.200239
time for this epoch 36.809396266937256
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  27
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18273313
flow loss 0.071750864
occ loss 0.11097878
time for this batch 0.25365757942199707
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15688139
flow loss 0.070483096
occ loss 0.08639549
time for this batch 0.30730485916137695
----------------------------------
train loss for this epoch:  0.199173
time for this epoch 35.70003390312195
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  28
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18956283
flow loss 0.082961835
occ loss 0.10659733
time for this batch 0.2703700065612793
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21013807
flow loss 0.07582954
occ loss 0.13430525
time for this batch 0.30036473274230957
----------------------------------
train loss for this epoch:  0.198323
time for this epoch 36.33100438117981
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  29
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21135007
flow loss 0.07876875
occ loss 0.13257745
time for this batch 0.25034642219543457
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2038164
flow loss 0.07802365
occ loss 0.12578885
time for this batch 0.29460811614990234
----------------------------------
train loss for this epoch:  0.196068
time for this epoch 36.5717294216156
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  30
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15656552
flow loss 0.06816195
occ loss 0.088400185
time for this batch 0.26603150367736816
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23535305
flow loss 0.08681808
occ loss 0.14853051
time for this batch 0.3082709312438965
----------------------------------
train loss for this epoch:  0.196247
time for this epoch 36.39078092575073
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  31
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21093604
flow loss 0.07945277
occ loss 0.13147928
time for this batch 0.29892396926879883
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23003227
flow loss 0.07967452
occ loss 0.15035376
time for this batch 0.3144857883453369
----------------------------------
train loss for this epoch:  0.194247
time for this epoch 36.363056659698486
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  32
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16577658
flow loss 0.07009601
occ loss 0.095677584
time for this batch 0.2851865291595459
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20187029
flow loss 0.08237076
occ loss 0.11949633
time for this batch 0.3075888156890869
----------------------------------
train loss for this epoch:  0.19523
time for this epoch 36.88019013404846
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  33
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16595392
flow loss 0.06682661
occ loss 0.09912397
time for this batch 0.27524614334106445
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1997519
flow loss 0.079427235
occ loss 0.12032048
time for this batch 0.3018186092376709
----------------------------------
train loss for this epoch:  0.191345
time for this epoch 36.57381319999695
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  34
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16905616
flow loss 0.07144752
occ loss 0.09760567
time for this batch 0.2635214328765869
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21878001
flow loss 0.08692749
occ loss 0.13184842
time for this batch 0.29453229904174805
----------------------------------
train loss for this epoch:  0.192792
time for this epoch 36.56272745132446
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  35
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1751798
flow loss 0.07813632
occ loss 0.097040124
time for this batch 0.28175997734069824
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18963453
flow loss 0.07237668
occ loss 0.11725437
time for this batch 0.2951846122741699
----------------------------------
train loss for this epoch:  0.190079
time for this epoch 37.18488931655884
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  36
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18572682
flow loss 0.07366733
occ loss 0.112055466
time for this batch 0.2374277114868164
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21070947
flow loss 0.08351169
occ loss 0.12719388
time for this batch 0.2873697280883789
----------------------------------
train loss for this epoch:  0.190372
time for this epoch 36.48785901069641
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  37
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17833066
flow loss 0.06895015
occ loss 0.10937741
time for this batch 0.24336719512939453
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15828165
flow loss 0.07491159
occ loss 0.08336729
time for this batch 0.27527856826782227
----------------------------------
train loss for this epoch:  0.18904
time for this epoch 35.32898545265198
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  38
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19937101
flow loss 0.08110639
occ loss 0.11826072
time for this batch 0.2968564033508301
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18770187
flow loss 0.071989425
occ loss 0.11570901
time for this batch 0.2882812023162842
----------------------------------
train loss for this epoch:  0.188092
time for this epoch 37.20861530303955
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  39
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2059547
flow loss 0.08343612
occ loss 0.122514345
time for this batch 0.27173566818237305
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17996256
flow loss 0.0725708
occ loss 0.10738864
time for this batch 0.2989482879638672
----------------------------------
train loss for this epoch:  0.187626
time for this epoch 35.988420486450195
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  40
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17026931
flow loss 0.06977387
occ loss 0.10049239
time for this batch 0.29137110710144043
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21843845
flow loss 0.0869526
occ loss 0.13148217
time for this batch 0.29509997367858887
----------------------------------
train loss for this epoch:  0.192335
time for this epoch 35.81845259666443
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  41
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17741044
flow loss 0.07096579
occ loss 0.106441356
time for this batch 0.25344324111938477
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13557583
flow loss 0.07164313
occ loss 0.063929975
time for this batch 0.32034921646118164
----------------------------------
train loss for this epoch:  0.187096
time for this epoch 36.910390853881836
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  42
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21570982
flow loss 0.080457695
occ loss 0.13524796
time for this batch 0.2680227756500244
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1924775
flow loss 0.077112556
occ loss 0.11536118
time for this batch 0.2730860710144043
----------------------------------
train loss for this epoch:  0.187118
time for this epoch 34.674721240997314
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  43
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17942195
flow loss 0.076459266
occ loss 0.10295859
time for this batch 0.279766321182251
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17702039
flow loss 0.07085346
occ loss 0.10616301
time for this batch 0.28073811531066895
----------------------------------
train loss for this epoch:  0.185205
time for this epoch 35.67924904823303
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  44
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18513392
flow loss 0.06956361
occ loss 0.11556615
time for this batch 0.2591056823730469
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17918205
flow loss 0.06628051
occ loss 0.11289803
time for this batch 0.2177410125732422
----------------------------------
train loss for this epoch:  0.184943
time for this epoch 35.05337119102478
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  45
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1823433
flow loss 0.07246921
occ loss 0.10987043
time for this batch 0.26689743995666504
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17665713
flow loss 0.07267065
occ loss 0.10398312
time for this batch 0.2892742156982422
----------------------------------
train loss for this epoch:  0.183888
time for this epoch 34.55796837806702
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  46
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17663854
flow loss 0.07142524
occ loss 0.10520952
time for this batch 0.254258394241333
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18716614
flow loss 0.07467027
occ loss 0.11249165
time for this batch 0.28038477897644043
----------------------------------
train loss for this epoch:  0.182633
time for this epoch 36.276222229003906
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  47
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20345166
flow loss 0.079716824
occ loss 0.123730764
time for this batch 0.2537808418273926
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2056253
flow loss 0.08136269
occ loss 0.1242588
time for this batch 0.28861331939697266
----------------------------------
train loss for this epoch:  0.184127
time for this epoch 35.83635854721069
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  48
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20284949
flow loss 0.077887975
occ loss 0.12495737
time for this batch 0.26705169677734375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17332548
flow loss 0.0714117
occ loss 0.10191004
time for this batch 0.31632304191589355
----------------------------------
train loss for this epoch:  0.183839
time for this epoch 36.09261655807495
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  49
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19353382
flow loss 0.071439035
occ loss 0.12209086
time for this batch 0.24895811080932617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16096519
flow loss 0.065679185
occ loss 0.095282644
time for this batch 0.29561424255371094
----------------------------------
train loss for this epoch:  0.181497
time for this epoch 34.79190969467163
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  50
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17891356
flow loss 0.06977864
occ loss 0.10913079
time for this batch 0.27708888053894043
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22062956
flow loss 0.07867352
occ loss 0.14195234
time for this batch 0.28504514694213867
----------------------------------
train loss for this epoch:  0.182917
time for this epoch 36.49173927307129
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  51
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19528167
flow loss 0.07545088
occ loss 0.11982691
time for this batch 0.21551203727722168
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19392785
flow loss 0.07273917
occ loss 0.121184886
time for this batch 0.30405616760253906
----------------------------------
train loss for this epoch:  0.181306
time for this epoch 36.255181074142456
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  52
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2058007
flow loss 0.0760668
occ loss 0.12973003
time for this batch 0.2696704864501953
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17143835
flow loss 0.07023072
occ loss 0.1012038
time for this batch 0.3026313781738281
----------------------------------
train loss for this epoch:  0.182572
time for this epoch 37.11858582496643
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  53
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16025512
flow loss 0.061260022
occ loss 0.09899191
time for this batch 0.26891517639160156
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1907624
flow loss 0.07023854
occ loss 0.120520286
time for this batch 0.2834939956665039
----------------------------------
train loss for this epoch:  0.1833
time for this epoch 36.923429012298584
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  54
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18750988
flow loss 0.06658815
occ loss 0.12091818
time for this batch 0.25446319580078125
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16232912
flow loss 0.06936911
occ loss 0.09295712
time for this batch 0.295015811920166
----------------------------------
train loss for this epoch:  0.181848
time for this epoch 36.19132423400879
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  55
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19813064
flow loss 0.0750558
occ loss 0.123070404
time for this batch 0.26606011390686035
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18697028
flow loss 0.08118334
occ loss 0.10578339
time for this batch 0.295196533203125
----------------------------------
train loss for this epoch:  0.180924
time for this epoch 36.61938738822937
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  56
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13539164
flow loss 0.059953917
occ loss 0.07543505
time for this batch 0.27460575103759766
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16657723
flow loss 0.06371121
occ loss 0.102862574
time for this batch 0.3220505714416504
----------------------------------
train loss for this epoch:  0.180049
time for this epoch 36.337857246398926
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  57
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16193925
flow loss 0.06744045
occ loss 0.09449542
time for this batch 0.26869654655456543
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1587363
flow loss 0.0674629
occ loss 0.09126965
time for this batch 0.3044757843017578
----------------------------------
train loss for this epoch:  0.178834
time for this epoch 36.10396647453308
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  58
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18001497
flow loss 0.071580656
occ loss 0.10843017
time for this batch 0.26924848556518555
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24322592
flow loss 0.08231777
occ loss 0.16090374
time for this batch 0.298065185546875
----------------------------------
train loss for this epoch:  0.180088
time for this epoch 36.44292593002319
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  59
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21036474
flow loss 0.08087902
occ loss 0.12948212
time for this batch 0.25804638862609863
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1726955
flow loss 0.071148194
occ loss 0.1015436
time for this batch 0.30953478813171387
----------------------------------
train loss for this epoch:  0.178833
time for this epoch 36.552202463150024
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  60
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18909642
flow loss 0.07482378
occ loss 0.11426877
time for this batch 0.26317644119262695
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16021167
flow loss 0.06262844
occ loss 0.09758005
time for this batch 0.27545833587646484
----------------------------------
train loss for this epoch:  0.177935
time for this epoch 36.789323806762695
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  61
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1753531
flow loss 0.07049825
occ loss 0.10485097
time for this batch 0.2588675022125244
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23180507
flow loss 0.08119468
occ loss 0.15060562
time for this batch 0.3041698932647705
----------------------------------
train loss for this epoch:  0.177658
time for this epoch 36.531075954437256
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  62
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16483021
flow loss 0.06810846
occ loss 0.09671803
time for this batch 0.2862701416015625
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1707902
flow loss 0.070452645
occ loss 0.100334
time for this batch 0.30980873107910156
----------------------------------
train loss for this epoch:  0.178709
time for this epoch 37.32961463928223
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  63
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19101085
flow loss 0.07643664
occ loss 0.11457002
time for this batch 0.26540207862854004
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17375998
flow loss 0.07352018
occ loss 0.10023622
time for this batch 0.26316332817077637
----------------------------------
train loss for this epoch:  0.177491
time for this epoch 37.00841212272644
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  64
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16290376
flow loss 0.06458205
occ loss 0.0983183
time for this batch 0.22385144233703613
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19531809
flow loss 0.076790966
occ loss 0.118523195
time for this batch 0.3075220584869385
----------------------------------
train loss for this epoch:  0.177791
time for this epoch 36.1899778842926
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  65
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1723732
flow loss 0.06427186
occ loss 0.1080977
time for this batch 0.27466535568237305
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1588903
flow loss 0.06846731
occ loss 0.09041987
time for this batch 0.31441783905029297
----------------------------------
train loss for this epoch:  0.177847
time for this epoch 35.817121505737305
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  66
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15411407
flow loss 0.06406804
occ loss 0.09004247
time for this batch 0.2798013687133789
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15341407
flow loss 0.06523414
occ loss 0.08817645
time for this batch 0.2758316993713379
----------------------------------
train loss for this epoch:  0.176361
time for this epoch 36.92623257637024
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  67
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16533963
flow loss 0.06229554
occ loss 0.10304066
time for this batch 0.23973584175109863
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18025367
flow loss 0.069517314
occ loss 0.11073259
time for this batch 0.26734161376953125
----------------------------------
train loss for this epoch:  0.175547
time for this epoch 35.93621826171875
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  68
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14675237
flow loss 0.057074748
occ loss 0.08967435
time for this batch 0.39333009719848633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20825483
flow loss 0.078486644
occ loss 0.12976412
time for this batch 0.2895548343658447
----------------------------------
train loss for this epoch:  0.174556
time for this epoch 36.998363971710205
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  69
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1691134
flow loss 0.06688292
occ loss 0.10222689
time for this batch 0.25583958625793457
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1819689
flow loss 0.06839092
occ loss 0.11357445
time for this batch 0.3097679615020752
----------------------------------
train loss for this epoch:  0.175689
time for this epoch 36.78766870498657
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  70
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21485184
flow loss 0.07621759
occ loss 0.13862966
time for this batch 0.29839277267456055
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18309729
flow loss 0.068571106
occ loss 0.11452207
time for this batch 0.29604268074035645
----------------------------------
train loss for this epoch:  0.175422
time for this epoch 36.98432946205139
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  71
# batch:  96
i_batch:  0.0
the loss for this batch:  0.23107934
flow loss 0.07968146
occ loss 0.15139344
time for this batch 0.2674062252044678
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20460348
flow loss 0.07571739
occ loss 0.12888229
time for this batch 0.2894108295440674
----------------------------------
train loss for this epoch:  0.17474
time for this epoch 37.167922496795654
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  72
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13783316
flow loss 0.060990963
occ loss 0.076838985
time for this batch 0.24402976036071777
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19373538
flow loss 0.07186142
occ loss 0.12187013
time for this batch 0.2826354503631592
----------------------------------
train loss for this epoch:  0.175583
time for this epoch 35.80727791786194
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  73
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1263726
flow loss 0.052411254
occ loss 0.073959
time for this batch 0.23323559761047363
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1419653
flow loss 0.060238235
occ loss 0.08172379
time for this batch 0.2734816074371338
----------------------------------
train loss for this epoch:  0.173186
time for this epoch 35.973103523254395
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  74
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20644625
flow loss 0.072972134
occ loss 0.1334702
time for this batch 0.2493000030517578
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1816346
flow loss 0.06511269
occ loss 0.116518065
time for this batch 0.2941913604736328
----------------------------------
train loss for this epoch:  0.172934
time for this epoch 37.26864814758301
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  75
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14022571
flow loss 0.06110737
occ loss 0.07911513
time for this batch 0.22477436065673828
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17298329
flow loss 0.06666909
occ loss 0.106310464
time for this batch 0.3136296272277832
----------------------------------
train loss for this epoch:  0.174291
time for this epoch 36.403735637664795
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  76
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13878988
flow loss 0.057158813
occ loss 0.08162786
time for this batch 0.25097084045410156
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18907057
flow loss 0.07163599
occ loss 0.117430665
time for this batch 0.3024766445159912
----------------------------------
train loss for this epoch:  0.172399
time for this epoch 36.01527237892151
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  77
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20286588
flow loss 0.074485816
occ loss 0.12837599
time for this batch 0.2635650634765625
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18744698
flow loss 0.06945291
occ loss 0.11798964
time for this batch 0.2990877628326416
----------------------------------
train loss for this epoch:  0.173956
time for this epoch 36.90428328514099
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  78
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17089725
flow loss 0.064613916
occ loss 0.106279776
time for this batch 0.2455596923828125
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14160351
flow loss 0.058686413
occ loss 0.08291394
time for this batch 0.3081514835357666
----------------------------------
train loss for this epoch:  0.172542
time for this epoch 35.707332611083984
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  79
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21988234
flow loss 0.075423725
occ loss 0.14445381
time for this batch 0.266132116317749
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19545025
flow loss 0.07247921
occ loss 0.12296689
time for this batch 0.2970151901245117
----------------------------------
train loss for this epoch:  0.172347
time for this epoch 35.4137179851532
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  80
# batch:  96
i_batch:  0.0
the loss for this batch:  0.20831278
flow loss 0.072078645
occ loss 0.13623023
time for this batch 0.2477560043334961
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19094275
flow loss 0.073169224
occ loss 0.117769696
time for this batch 0.29091453552246094
----------------------------------
train loss for this epoch:  0.171351
time for this epoch 35.82119607925415
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  81
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16882126
flow loss 0.06315825
occ loss 0.105659105
time for this batch 0.26991724967956543
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19282202
flow loss 0.072176196
occ loss 0.1206419
time for this batch 0.29759764671325684
----------------------------------
train loss for this epoch:  0.171169
time for this epoch 35.67663788795471
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  82
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16017881
flow loss 0.06233093
occ loss 0.09784419
time for this batch 0.2773168087005615
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17233604
flow loss 0.06445405
occ loss 0.10787841
time for this batch 0.29906630516052246
----------------------------------
train loss for this epoch:  0.172369
time for this epoch 36.35960268974304
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  83
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18449962
flow loss 0.069640696
occ loss 0.114855066
time for this batch 0.27439379692077637
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17863105
flow loss 0.06338445
occ loss 0.11524329
time for this batch 0.3052985668182373
----------------------------------
train loss for this epoch:  0.170522
time for this epoch 35.961769104003906
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  84
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16241603
flow loss 0.06163134
occ loss 0.100781
time for this batch 0.24818849563598633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15231797
flow loss 0.06019659
occ loss 0.09211821
time for this batch 0.27852916717529297
----------------------------------
train loss for this epoch:  0.169224
time for this epoch 35.969027280807495
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  85
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14543402
flow loss 0.0646271
occ loss 0.0808033
time for this batch 0.2744300365447998
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1657066
flow loss 0.06162411
occ loss 0.10407907
time for this batch 0.24903464317321777
----------------------------------
train loss for this epoch:  0.169875
time for this epoch 35.53133964538574
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  86
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15730384
flow loss 0.058478154
occ loss 0.098822385
time for this batch 0.2624833583831787
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14278927
flow loss 0.0584066
occ loss 0.08437944
time for this batch 0.29090261459350586
----------------------------------
train loss for this epoch:  0.169951
time for this epoch 36.399160861968994
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  87
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16516238
flow loss 0.06617231
occ loss 0.098986454
time for this batch 0.26729822158813477
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17038675
flow loss 0.0669805
occ loss 0.10340261
time for this batch 0.2955605983734131
----------------------------------
train loss for this epoch:  0.169764
time for this epoch 36.39837098121643
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  88
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16472793
flow loss 0.06453218
occ loss 0.10019229
time for this batch 0.24997782707214355
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19479768
flow loss 0.07145292
occ loss 0.12334092
time for this batch 0.2692372798919678
----------------------------------
train loss for this epoch:  0.168968
time for this epoch 35.8499972820282
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  89
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1384904
flow loss 0.06287128
occ loss 0.075615786
time for this batch 0.2775707244873047
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1836156
flow loss 0.0682636
occ loss 0.11534828
time for this batch 0.2675633430480957
----------------------------------
train loss for this epoch:  0.168052
time for this epoch 36.95125341415405
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  90
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18496582
flow loss 0.065818235
occ loss 0.11914395
time for this batch 0.24342751502990723
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17197001
flow loss 0.0641842
occ loss 0.10778219
time for this batch 0.28157806396484375
----------------------------------
train loss for this epoch:  0.168625
time for this epoch 36.20490312576294
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  91
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16162637
flow loss 0.06214443
occ loss 0.09947826
time for this batch 0.27723097801208496
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1792115
flow loss 0.06685048
occ loss 0.11235764
time for this batch 0.31040096282958984
----------------------------------
train loss for this epoch:  0.168336
time for this epoch 36.55529236793518
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  92
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12729232
flow loss 0.055840142
occ loss 0.07144938
time for this batch 0.26594018936157227
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18577763
flow loss 0.06695891
occ loss 0.118814744
time for this batch 0.310391902923584
----------------------------------
train loss for this epoch:  0.167042
time for this epoch 36.72194170951843
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  93
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1689393
flow loss 0.07017335
occ loss 0.09876239
time for this batch 0.272475004196167
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14272101
flow loss 0.06409043
occ loss 0.07862693
time for this batch 0.30746960639953613
----------------------------------
train loss for this epoch:  0.167704
time for this epoch 36.28701853752136
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  94
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17403838
flow loss 0.06892745
occ loss 0.10510692
time for this batch 0.27414655685424805
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15586485
flow loss 0.06114303
occ loss 0.09471817
time for this batch 0.3116471767425537
----------------------------------
train loss for this epoch:  0.16856
time for this epoch 35.48454737663269
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  95
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13571046
flow loss 0.061258968
occ loss 0.074448146
time for this batch 0.2691929340362549
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16419896
flow loss 0.060844686
occ loss 0.103350736
time for this batch 0.27648019790649414
----------------------------------
train loss for this epoch:  0.166094
time for this epoch 36.216663122177124
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  96
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17795599
flow loss 0.064249836
occ loss 0.11370232
time for this batch 0.27649688720703125
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15249228
flow loss 0.06364626
occ loss 0.08884232
time for this batch 0.19113397598266602
----------------------------------
train loss for this epoch:  0.167743
time for this epoch 36.36919689178467
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  97
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14917515
flow loss 0.06578784
occ loss 0.08338444
time for this batch 0.2839319705963135
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13444562
flow loss 0.05417226
occ loss 0.08027055
time for this batch 0.2982370853424072
----------------------------------
train loss for this epoch:  0.16734
time for this epoch 36.417330741882324
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  98
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21979143
flow loss 0.06818233
occ loss 0.15160508
time for this batch 0.2704615592956543
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1771244
flow loss 0.06841209
occ loss 0.10870842
time for this batch 0.32030415534973145
----------------------------------
train loss for this epoch:  0.167038
time for this epoch 37.29658889770508
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  99
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1925268
flow loss 0.07838245
occ loss 0.114140585
time for this batch 0.2622978687286377
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18882476
flow loss 0.06906082
occ loss 0.119760185
time for this batch 0.29944682121276855
----------------------------------
train loss for this epoch:  0.166624
time for this epoch 36.36146926879883
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  100
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1792474
flow loss 0.06926883
occ loss 0.109975085
time for this batch 0.27871274948120117
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14372413
flow loss 0.06029646
occ loss 0.083424725
time for this batch 0.24602127075195312
----------------------------------
train loss for this epoch:  0.167185
time for this epoch 35.5762300491333
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  101
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13339686
flow loss 0.056155924
occ loss 0.07723803
time for this batch 0.2646925449371338
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18425202
flow loss 0.06316422
occ loss 0.12108395
time for this batch 0.26790809631347656
----------------------------------
train loss for this epoch:  0.165898
time for this epoch 36.36387753486633
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  102
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19650291
flow loss 0.068439886
occ loss 0.12805936
time for this batch 0.2657010555267334
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14620928
flow loss 0.05736331
occ loss 0.0888426
time for this batch 0.3060128688812256
----------------------------------
train loss for this epoch:  0.164963
time for this epoch 36.56157612800598
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  103
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14426914
flow loss 0.05403943
occ loss 0.09022621
time for this batch 0.25715208053588867
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14763078
flow loss 0.065690376
occ loss 0.08193676
time for this batch 0.293501615524292
----------------------------------
train loss for this epoch:  0.165213
time for this epoch 36.5147705078125
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  104
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18624297
flow loss 0.06886202
occ loss 0.11737693
time for this batch 0.279909610748291
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15574357
flow loss 0.05956923
occ loss 0.09617124
time for this batch 0.28909850120544434
----------------------------------
train loss for this epoch:  0.165359
time for this epoch 36.672971963882446
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  105
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16480617
flow loss 0.06395461
occ loss 0.1008476
time for this batch 0.2650613784790039
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18722355
flow loss 0.06994075
occ loss 0.11727901
time for this batch 0.3086271286010742
----------------------------------
train loss for this epoch:  0.167727
time for this epoch 36.103089332580566
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  106
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17915331
flow loss 0.06119725
occ loss 0.117951944
time for this batch 0.2696554660797119
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14060386
flow loss 0.057017297
occ loss 0.08358305
time for this batch 0.3029000759124756
----------------------------------
train loss for this epoch:  0.163972
time for this epoch 36.27958703041077
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  107
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1450024
flow loss 0.05720012
occ loss 0.0877989
time for this batch 0.24257516860961914
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18908204
flow loss 0.0646449
occ loss 0.124432966
time for this batch 0.29509830474853516
----------------------------------
train loss for this epoch:  0.163483
time for this epoch 36.98398733139038
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  108
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14166337
flow loss 0.059420567
occ loss 0.08223919
time for this batch 0.2397596836090088
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14898847
flow loss 0.06551082
occ loss 0.08347442
time for this batch 0.29529261589050293
----------------------------------
train loss for this epoch:  0.163985
time for this epoch 35.97303080558777
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  109
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18040887
flow loss 0.07181726
occ loss 0.10858748
time for this batch 0.275068998336792
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16489393
flow loss 0.06004269
occ loss 0.10484768
time for this batch 0.2732245922088623
----------------------------------
train loss for this epoch:  0.166312
time for this epoch 35.55239748954773
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  110
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17920618
flow loss 0.062450223
occ loss 0.1167522
time for this batch 0.29524993896484375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1409356
flow loss 0.058668554
occ loss 0.082263716
time for this batch 0.31198740005493164
----------------------------------
train loss for this epoch:  0.162209
time for this epoch 36.33540749549866
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  111
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16470754
flow loss 0.06564003
occ loss 0.09906371
time for this batch 0.30994153022766113
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19903867
flow loss 0.0701022
occ loss 0.12893248
time for this batch 0.2807607650756836
----------------------------------
train loss for this epoch:  0.164793
time for this epoch 35.534032106399536
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  112
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17721799
flow loss 0.06598762
occ loss 0.111226484
time for this batch 0.27225279808044434
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15032166
flow loss 0.06411424
occ loss 0.0862041
time for this batch 0.3135640621185303
----------------------------------
train loss for this epoch:  0.164446
time for this epoch 36.557472229003906
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  113
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18643053
flow loss 0.073722966
occ loss 0.112703495
time for this batch 0.28002142906188965
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14574267
flow loss 0.06084684
occ loss 0.084892206
time for this batch 0.2647273540496826
----------------------------------
train loss for this epoch:  0.164524
time for this epoch 35.44134783744812
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  114
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15067878
flow loss 0.062534206
occ loss 0.08814162
time for this batch 0.22495460510253906
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15089117
flow loss 0.060993984
occ loss 0.0898941
time for this batch 0.2825770378112793
----------------------------------
train loss for this epoch:  0.163745
time for this epoch 35.87050747871399
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  115
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15983786
flow loss 0.060121026
occ loss 0.09971274
time for this batch 0.2091066837310791
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17609261
flow loss 0.06151663
occ loss 0.11457251
time for this batch 0.2752385139465332
----------------------------------
train loss for this epoch:  0.162677
time for this epoch 36.384774684906006
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  116
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1599124
flow loss 0.06316183
occ loss 0.0967466
time for this batch 0.25289082527160645
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12841833
flow loss 0.055345513
occ loss 0.07306963
time for this batch 0.30292677879333496
----------------------------------
train loss for this epoch:  0.162527
time for this epoch 34.71121263504028
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  117
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19199927
flow loss 0.06499395
occ loss 0.12700103
time for this batch 0.26561594009399414
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18802883
flow loss 0.066949144
occ loss 0.1210755
time for this batch 0.3181309700012207
----------------------------------
train loss for this epoch:  0.162946
time for this epoch 36.49592852592468
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  118
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15289262
flow loss 0.0609229
occ loss 0.091966294
time for this batch 0.2164320945739746
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16829242
flow loss 0.058662936
occ loss 0.10962578
time for this batch 0.29235148429870605
----------------------------------
train loss for this epoch:  0.162736
time for this epoch 36.584577798843384
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  119
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21054873
flow loss 0.068971455
occ loss 0.14157327
time for this batch 0.2836272716522217
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14755294
flow loss 0.055179562
occ loss 0.09236987
time for this batch 0.31075525283813477
----------------------------------
train loss for this epoch:  0.163116
time for this epoch 36.272130250930786
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  120
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18151855
flow loss 0.06581288
occ loss 0.11570166
time for this batch 0.2699167728424072
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1441034
flow loss 0.05207712
occ loss 0.09202325
time for this batch 0.3059103488922119
----------------------------------
train loss for this epoch:  0.161799
time for this epoch 36.46512460708618
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  121
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15202309
flow loss 0.056452837
occ loss 0.0955667
time for this batch 0.2783970832824707
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17544095
flow loss 0.06358225
occ loss 0.11185445
time for this batch 0.29420018196105957
----------------------------------
train loss for this epoch:  0.162114
time for this epoch 36.26199746131897
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  122
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17885666
flow loss 0.064737014
occ loss 0.11411582
time for this batch 0.23047900199890137
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19230288
flow loss 0.07005014
occ loss 0.12224901
time for this batch 0.29169297218322754
----------------------------------
train loss for this epoch:  0.162112
time for this epoch 36.30904674530029
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  123
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17073213
flow loss 0.061045486
occ loss 0.10968286
time for this batch 0.2779092788696289
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15849109
flow loss 0.06268661
occ loss 0.09580027
time for this batch 0.283616304397583
----------------------------------
train loss for this epoch:  0.163628
time for this epoch 35.89368510246277
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  124
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13628562
flow loss 0.055544436
occ loss 0.080737926
time for this batch 0.27358531951904297
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20460193
flow loss 0.07796458
occ loss 0.12663339
time for this batch 0.3051295280456543
----------------------------------
train loss for this epoch:  0.161929
time for this epoch 36.47081518173218
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  125
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19193465
flow loss 0.063060544
occ loss 0.12887041
time for this batch 0.2526824474334717
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16662036
flow loss 0.061257236
occ loss 0.105359666
time for this batch 0.2859337329864502
----------------------------------
train loss for this epoch:  0.161446
time for this epoch 34.478336811065674
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  126
# batch:  96
i_batch:  0.0
the loss for this batch:  0.123939276
flow loss 0.054180507
occ loss 0.06975557
time for this batch 0.27155447006225586
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14539194
flow loss 0.055904627
occ loss 0.08948406
time for this batch 0.2542872428894043
----------------------------------
train loss for this epoch:  0.160568
time for this epoch 34.25938701629639
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  127
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16881533
flow loss 0.06177378
occ loss 0.107037924
time for this batch 0.2815520763397217
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1581173
flow loss 0.05718545
occ loss 0.10092878
time for this batch 0.2951207160949707
----------------------------------
train loss for this epoch:  0.160911
time for this epoch 35.72840070724487
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  128
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15884277
flow loss 0.059885863
occ loss 0.09895339
time for this batch 0.27852773666381836
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16874881
flow loss 0.05865233
occ loss 0.11009253
time for this batch 0.30559730529785156
----------------------------------
train loss for this epoch:  0.161302
time for this epoch 36.105477809906006
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  129
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1437167
flow loss 0.058015224
occ loss 0.08569791
time for this batch 0.42058491706848145
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16073495
flow loss 0.061587088
occ loss 0.0991444
time for this batch 0.28392839431762695
----------------------------------
train loss for this epoch:  0.161577
time for this epoch 37.649142265319824
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  130
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16021949
flow loss 0.057391483
occ loss 0.10282437
time for this batch 0.279510498046875
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19075653
flow loss 0.06747203
occ loss 0.12328041
time for this batch 0.31781458854675293
----------------------------------
train loss for this epoch:  0.161373
time for this epoch 36.5324330329895
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  131
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17288816
flow loss 0.06324413
occ loss 0.10964043
time for this batch 0.27089977264404297
----------------------------------
i_batch:  64.0
the loss for this batch:  0.118350185
flow loss 0.04942366
occ loss 0.06892384
time for this batch 0.31757402420043945
----------------------------------
train loss for this epoch:  0.160112
time for this epoch 36.23045492172241
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  132
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1839291
flow loss 0.06783835
occ loss 0.116086505
time for this batch 0.26749324798583984
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1985095
flow loss 0.07016828
occ loss 0.12833713
time for this batch 0.31740641593933105
----------------------------------
train loss for this epoch:  0.159707
time for this epoch 35.9177360534668
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  133
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14249225
flow loss 0.05624874
occ loss 0.08624006
time for this batch 0.25034546852111816
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13436459
flow loss 0.056321207
occ loss 0.0780398
time for this batch 0.30282115936279297
----------------------------------
train loss for this epoch:  0.161514
time for this epoch 36.86523675918579
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  134
# batch:  96
i_batch:  0.0
the loss for this batch:  0.2004257
flow loss 0.06860453
occ loss 0.13181691
time for this batch 0.2717757225036621
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15602645
flow loss 0.062557235
occ loss 0.09346588
time for this batch 0.2917780876159668
----------------------------------
train loss for this epoch:  0.160199
time for this epoch 36.49582576751709
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  135
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14085734
flow loss 0.060759235
occ loss 0.0800945
time for this batch 0.2746298313140869
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17397623
flow loss 0.06131994
occ loss 0.11265256
time for this batch 0.30915260314941406
----------------------------------
train loss for this epoch:  0.159597
time for this epoch 35.78676915168762
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  136
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15552105
flow loss 0.061358456
occ loss 0.09415852
time for this batch 0.26572299003601074
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18917036
flow loss 0.0652914
occ loss 0.12387487
time for this batch 0.30899477005004883
----------------------------------
train loss for this epoch:  0.159136
time for this epoch 36.68729376792908
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  137
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17826378
flow loss 0.06304875
occ loss 0.115210935
time for this batch 0.27826428413391113
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15807465
flow loss 0.060441405
occ loss 0.097629264
time for this batch 0.28302931785583496
----------------------------------
train loss for this epoch:  0.161887
time for this epoch 34.945910930633545
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  138
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12794045
flow loss 0.056345534
occ loss 0.07159142
time for this batch 0.2645735740661621
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16976996
flow loss 0.06251365
occ loss 0.107252404
time for this batch 0.28904271125793457
----------------------------------
train loss for this epoch:  0.159296
time for this epoch 36.909430742263794
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  139
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1417117
flow loss 0.058905184
occ loss 0.082803264
time for this batch 0.2606220245361328
----------------------------------
i_batch:  64.0
the loss for this batch:  0.175949
flow loss 0.061806288
occ loss 0.1141386
time for this batch 0.30467891693115234
----------------------------------
train loss for this epoch:  0.159963
time for this epoch 34.58863615989685
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  140
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14252816
flow loss 0.06113178
occ loss 0.08139283
time for this batch 0.2747182846069336
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12418927
flow loss 0.052286383
occ loss 0.0719
time for this batch 0.27669858932495117
----------------------------------
train loss for this epoch:  0.159624
time for this epoch 35.7971715927124
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  141
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15764715
flow loss 0.05971233
occ loss 0.09793154
time for this batch 0.2818324565887451
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16641568
flow loss 0.06204312
occ loss 0.104368776
time for this batch 0.3014504909515381
----------------------------------
train loss for this epoch:  0.159577
time for this epoch 34.8903706073761
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  142
# batch:  96
i_batch:  0.0
the loss for this batch:  0.146142
flow loss 0.055916335
occ loss 0.09022186
time for this batch 0.2674839496612549
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18731645
flow loss 0.06892279
occ loss 0.11839003
time for this batch 0.3254125118255615
----------------------------------
train loss for this epoch:  0.159566
time for this epoch 35.38903331756592
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  143
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14505488
flow loss 0.062865175
occ loss 0.08218622
time for this batch 0.2435321807861328
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16844288
flow loss 0.06348314
occ loss 0.10495596
time for this batch 0.29888248443603516
----------------------------------
train loss for this epoch:  0.158611
time for this epoch 35.91069006919861
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  144
# batch:  96
i_batch:  0.0
the loss for this batch:  0.21453293
flow loss 0.0684124
occ loss 0.14611627
time for this batch 0.27896666526794434
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1517582
flow loss 0.05938289
occ loss 0.09237152
time for this batch 0.2793691158294678
----------------------------------
train loss for this epoch:  0.159632
time for this epoch 35.34902381896973
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  145
# batch:  96
i_batch:  0.0
the loss for this batch:  0.18041235
flow loss 0.06303583
occ loss 0.11737269
time for this batch 0.2442793846130371
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1606298
flow loss 0.060738277
occ loss 0.09988796
time for this batch 0.26990580558776855
----------------------------------
train loss for this epoch:  0.158592
time for this epoch 35.56690835952759
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  146
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12781459
flow loss 0.05256058
occ loss 0.07525085
time for this batch 0.23339438438415527
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14040744
flow loss 0.054408
occ loss 0.085996084
time for this batch 0.30510544776916504
----------------------------------
train loss for this epoch:  0.158164
time for this epoch 35.131457805633545
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  147
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14862797
flow loss 0.055819783
occ loss 0.092805006
time for this batch 0.2700660228729248
----------------------------------
i_batch:  64.0
the loss for this batch:  0.192496
flow loss 0.06613547
occ loss 0.12635626
time for this batch 0.31427884101867676
----------------------------------
train loss for this epoch:  0.158034
time for this epoch 36.332279682159424
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  148
# batch:  96
i_batch:  0.0
the loss for this batch:  0.19374706
flow loss 0.066885725
occ loss 0.12685724
time for this batch 0.24476408958435059
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15797669
flow loss 0.061485022
occ loss 0.09648786
time for this batch 0.27745485305786133
----------------------------------
train loss for this epoch:  0.158299
time for this epoch 35.10307860374451
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  149
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14578533
flow loss 0.054816984
occ loss 0.09096462
time for this batch 0.2791123390197754
----------------------------------
i_batch:  64.0
the loss for this batch:  0.11963681
flow loss 0.050678685
occ loss 0.06895516
time for this batch 0.2762587070465088
----------------------------------
train loss for this epoch:  0.158567
time for this epoch 35.49077749252319
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  150
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12810622
flow loss 0.05794647
occ loss 0.070156336
time for this batch 0.2563741207122803
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16514343
flow loss 0.05682362
occ loss 0.10831585
time for this batch 0.28494906425476074
----------------------------------
train loss for this epoch:  0.151692
time for this epoch 36.827033281326294
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  151
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13217722
flow loss 0.055311784
occ loss 0.076861866
time for this batch 0.2642040252685547
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15887071
flow loss 0.05993556
occ loss 0.09893129
time for this batch 0.27207446098327637
----------------------------------
train loss for this epoch:  0.149474
time for this epoch 34.97927927970886
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  152
# batch:  96
i_batch:  0.0
the loss for this batch:  0.123123184
flow loss 0.0505635
occ loss 0.072556406
time for this batch 0.24546146392822266
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16923644
flow loss 0.057722855
occ loss 0.11150954
time for this batch 0.28682899475097656
----------------------------------
train loss for this epoch:  0.149566
time for this epoch 35.53329157829285
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  153
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15054187
flow loss 0.05438765
occ loss 0.09615054
time for this batch 0.2723958492279053
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15965675
flow loss 0.058533467
occ loss 0.101119444
time for this batch 0.2933378219604492
----------------------------------
train loss for this epoch:  0.149114
time for this epoch 35.965455055236816
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  154
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13708119
flow loss 0.056724064
occ loss 0.08035376
time for this batch 0.27086973190307617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15803601
flow loss 0.055397097
occ loss 0.102635324
time for this batch 0.31653475761413574
----------------------------------
train loss for this epoch:  0.149293
time for this epoch 36.268746852874756
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  155
# batch:  96
i_batch:  0.0
the loss for this batch:  0.09862001
flow loss 0.04666235
occ loss 0.05195507
time for this batch 0.3024623394012451
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16026913
flow loss 0.057780832
occ loss 0.10248493
time for this batch 0.3174855709075928
----------------------------------
train loss for this epoch:  0.148643
time for this epoch 36.87047815322876
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  156
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15572418
flow loss 0.057452306
occ loss 0.098268464
time for this batch 0.27608227729797363
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1635799
flow loss 0.056331094
occ loss 0.107244976
time for this batch 0.27466535568237305
----------------------------------
train loss for this epoch:  0.148443
time for this epoch 35.80942678451538
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  157
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14581478
flow loss 0.054201234
occ loss 0.09160999
time for this batch 0.27735304832458496
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19828668
flow loss 0.065415576
occ loss 0.13286671
time for this batch 0.2568080425262451
----------------------------------
train loss for this epoch:  0.148535
time for this epoch 35.459089517593384
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  158
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14719172
flow loss 0.054666243
occ loss 0.092522085
time for this batch 0.2786402702331543
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13834289
flow loss 0.055291
occ loss 0.08304844
time for this batch 0.30443739891052246
----------------------------------
train loss for this epoch:  0.148349
time for this epoch 36.783923387527466
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  159
# batch:  96
i_batch:  0.0
the loss for this batch:  0.160526
flow loss 0.05679873
occ loss 0.1037235
time for this batch 0.27070021629333496
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13834481
flow loss 0.054114923
occ loss 0.08422666
time for this batch 0.31406617164611816
----------------------------------
train loss for this epoch:  0.148126
time for this epoch 35.626418590545654
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  160
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13770647
flow loss 0.055006906
occ loss 0.0826959
time for this batch 0.28197598457336426
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17843185
flow loss 0.059375595
occ loss 0.11905219
time for this batch 0.27596330642700195
----------------------------------
train loss for this epoch:  0.14823
time for this epoch 36.949193477630615
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  161
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12057901
flow loss 0.049236186
occ loss 0.07133965
time for this batch 0.2742323875427246
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14266214
flow loss 0.0536653
occ loss 0.08899361
time for this batch 0.25426793098449707
----------------------------------
train loss for this epoch:  0.148686
time for this epoch 35.92555499076843
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  162
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13300613
flow loss 0.05293093
occ loss 0.08007183
time for this batch 0.26070356369018555
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13962746
flow loss 0.053611625
occ loss 0.08601251
time for this batch 0.3038301467895508
----------------------------------
train loss for this epoch:  0.148328
time for this epoch 36.852476358413696
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  163
# batch:  96
i_batch:  0.0
the loss for this batch:  0.121926196
flow loss 0.05054193
occ loss 0.07138091
time for this batch 0.2625150680541992
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15320995
flow loss 0.055076167
occ loss 0.09812964
time for this batch 0.3072185516357422
----------------------------------
train loss for this epoch:  0.148273
time for this epoch 36.493934869766235
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  164
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1599451
flow loss 0.05622095
occ loss 0.1037204
time for this batch 0.32819652557373047
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15655085
flow loss 0.05764683
occ loss 0.09889996
time for this batch 0.29054689407348633
----------------------------------
train loss for this epoch:  0.148249
time for this epoch 37.230945348739624
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  165
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12497856
flow loss 0.0517163
occ loss 0.073259
time for this batch 0.2600288391113281
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15341625
flow loss 0.055854034
occ loss 0.09755866
time for this batch 0.29476237297058105
----------------------------------
train loss for this epoch:  0.148194
time for this epoch 36.003459453582764
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  166
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16418839
flow loss 0.05628429
occ loss 0.1079
time for this batch 0.23946309089660645
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19118255
flow loss 0.0626505
occ loss 0.1285279
time for this batch 0.3184168338775635
----------------------------------
train loss for this epoch:  0.148222
time for this epoch 36.73204684257507
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  167
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16553316
flow loss 0.054340076
occ loss 0.111189485
time for this batch 0.25547075271606445
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1559886
flow loss 0.053324893
occ loss 0.10266001
time for this batch 0.2749302387237549
----------------------------------
train loss for this epoch:  0.147651
time for this epoch 36.6414155960083
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  168
# batch:  96
i_batch:  0.0
the loss for this batch:  0.09743351
flow loss 0.043516714
occ loss 0.05391389
time for this batch 0.2649660110473633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13244443
flow loss 0.05187124
occ loss 0.080569394
time for this batch 0.28249597549438477
----------------------------------
train loss for this epoch:  0.147857
time for this epoch 35.86417818069458
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  169
# batch:  96
i_batch:  0.0
the loss for this batch:  0.12791409
flow loss 0.052431818
occ loss 0.07547886
time for this batch 0.2996053695678711
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16294289
flow loss 0.059939656
occ loss 0.10299928
time for this batch 0.25591039657592773
----------------------------------
train loss for this epoch:  0.147864
time for this epoch 35.234233379364014
No_decrease:  15
----------------an epoch starts-------------------
i_epoch:  170
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14369164
flow loss 0.052479655
occ loss 0.09120847
time for this batch 0.26120805740356445
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13683699
flow loss 0.05376484
occ loss 0.083068736
time for this batch 0.3128654956817627
----------------------------------
train loss for this epoch:  0.148016
time for this epoch 37.6161732673645
No_decrease:  16
----------------an epoch starts-------------------
i_epoch:  171
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1421266
flow loss 0.05348181
occ loss 0.088641025
time for this batch 0.2354123592376709
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14119884
flow loss 0.05177046
occ loss 0.0894247
time for this batch 0.309612512588501
----------------------------------
train loss for this epoch:  0.147895
time for this epoch 36.667091846466064
No_decrease:  17
----------------an epoch starts-------------------
i_epoch:  172
# batch:  96
i_batch:  0.0
the loss for this batch:  0.118515715
flow loss 0.050481316
occ loss 0.06803116
time for this batch 0.23918485641479492
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15910368
flow loss 0.056139506
occ loss 0.102960154
time for this batch 0.3157031536102295
----------------------------------
train loss for this epoch:  0.147847
time for this epoch 36.326873540878296
No_decrease:  18
----------------an epoch starts-------------------
i_epoch:  173
# batch:  96
i_batch:  0.0
the loss for this batch:  0.15701742
flow loss 0.055181738
occ loss 0.10183202
time for this batch 0.2698826789855957
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16998495
flow loss 0.056493104
occ loss 0.113487974
time for this batch 0.3132328987121582
----------------------------------
train loss for this epoch:  0.147683
time for this epoch 36.75192904472351
No_decrease:  19
----------------an epoch starts-------------------
i_epoch:  174
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16548747
flow loss 0.05941746
occ loss 0.10606601
time for this batch 0.25651073455810547
----------------------------------
i_batch:  64.0
the loss for this batch:  0.12980081
flow loss 0.04741963
occ loss 0.08237811
time for this batch 0.3014044761657715
----------------------------------
train loss for this epoch:  0.147649
time for this epoch 37.2434356212616
No_decrease:  20
----------------an epoch starts-------------------
i_epoch:  175
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13429709
flow loss 0.04646454
occ loss 0.08782978
time for this batch 0.26686596870422363
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13009898
flow loss 0.050194006
occ loss 0.07990201
time for this batch 0.30699968338012695
----------------------------------
train loss for this epoch:  0.147635
time for this epoch 36.66208600997925
No_decrease:  21
----------------an epoch starts-------------------
i_epoch:  176
# batch:  96
i_batch:  0.0
the loss for this batch:  0.17292935
flow loss 0.05908982
occ loss 0.11383555
time for this batch 0.2614457607269287
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14736788
flow loss 0.05609841
occ loss 0.091265574
time for this batch 0.24894332885742188
----------------------------------
train loss for this epoch:  0.147607
time for this epoch 36.20180106163025
No_decrease:  22
----------------an epoch starts-------------------
i_epoch:  177
# batch:  96
i_batch:  0.0
the loss for this batch:  0.11218548
flow loss 0.04676508
occ loss 0.06541755
time for this batch 0.2622804641723633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14724809
flow loss 0.05475111
occ loss 0.092492886
time for this batch 0.31412839889526367
----------------------------------
train loss for this epoch:  0.147755
time for this epoch 36.29287052154541
No_decrease:  23
----------------an epoch starts-------------------
i_epoch:  178
# batch:  96
i_batch:  0.0
the loss for this batch:  0.16172826
flow loss 0.058760982
occ loss 0.10296306
time for this batch 0.2830810546875
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1763635
flow loss 0.060359105
occ loss 0.116000235
time for this batch 0.28503966331481934
----------------------------------
train loss for this epoch:  0.148071
time for this epoch 37.66193222999573
No_decrease:  24
----------------an epoch starts-------------------
i_epoch:  179
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13073285
flow loss 0.051524747
occ loss 0.079204805
time for this batch 0.24927401542663574
----------------------------------
i_batch:  64.0
the loss for this batch:  0.11243472
flow loss 0.044522107
occ loss 0.067909665
time for this batch 0.30908966064453125
----------------------------------
train loss for this epoch:  0.147643
time for this epoch 38.01120924949646
No_decrease:  25
----------------an epoch starts-------------------
i_epoch:  180
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1802071
flow loss 0.06100902
occ loss 0.1191938
time for this batch 0.2650148868560791
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13046998
flow loss 0.04770943
occ loss 0.082757466
time for this batch 0.309734582901001
----------------------------------
train loss for this epoch:  0.147751
time for this epoch 37.02081608772278
No_decrease:  26
----------------an epoch starts-------------------
i_epoch:  181
# batch:  96
i_batch:  0.0
the loss for this batch:  0.14609714
flow loss 0.05566163
occ loss 0.09043176
time for this batch 0.25281548500061035
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13852595
flow loss 0.051228877
occ loss 0.087293245
time for this batch 0.30706357955932617
----------------------------------
train loss for this epoch:  0.147479
time for this epoch 36.8151068687439
No_decrease:  27
----------------an epoch starts-------------------
i_epoch:  182
# batch:  96
i_batch:  0.0
the loss for this batch:  0.13818757
flow loss 0.05411706
occ loss 0.08406724
time for this batch 0.2726554870605469
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17863788
flow loss 0.060442675
occ loss 0.11819139
time for this batch 0.28109145164489746
----------------------------------
train loss for this epoch:  0.147548
time for this epoch 35.303800106048584
No_decrease:  28
----------------an epoch starts-------------------
i_epoch:  183
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1214304
flow loss 0.04717075
occ loss 0.074256726
time for this batch 0.2654991149902344
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15354057
flow loss 0.056814034
occ loss 0.09672306
time for this batch 0.27810001373291016
----------------------------------
train loss for this epoch:  0.147498
time for this epoch 35.49075365066528
No_decrease:  29
----------------an epoch starts-------------------
i_epoch:  184
# batch:  96
i_batch:  0.0
the loss for this batch:  0.1360121
flow loss 0.055004288
occ loss 0.08100446
time for this batch 0.26726794242858887
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18823014
flow loss 0.05962501
occ loss 0.12860096
time for this batch 0.29994893074035645
----------------------------------
train loss for this epoch:  0.147601
time for this epoch 36.23290753364563
Early stop at the 185-th epoch

7: apply the model to vali and test¶

In [14]:
def apply_to_vali_test(model, vt, f_o_mean_std):
    """Evaluate a trained model on one data split (validation or test).

    Parameters
    ----------
    model : trained model passed straight through to ``vali_test``.
    vt : dict-like split with keys "flow", "flow_mask", "occupancy",
        "occupancy_mask". Only the two mask tensors are moved to ``device``
        here; presumably ``vali_test`` handles placement of the data
        tensors itself — confirm against its definition.
    f_o_mean_std : normalization statistics forwarded to ``vali_test``
        (presumably used to de-normalize predictions before scoring).

    Returns
    -------
    tuple
        (flow MAE, flow RMSE, occupancy MAE, occupancy RMSE), as computed
        by ``vali_test`` with batch size ``hyper["b_s_vt"]``. The four
        metrics are also printed, one per line.
    """
    flow = vt["flow"]
    flow_mask = vt["flow_mask"].to(device)
    occ = vt["occupancy"]
    occ_mask = vt["occupancy_mask"].to(device)

    # Delegate the actual batched evaluation; this wrapper only unpacks
    # the split and reports the metrics.
    metrics = vali_test(model, flow, flow_mask, occ, occ_mask,
                        f_o_mean_std, hyper["b_s_vt"])
    f_mae, f_rmse, o_mae, o_rmse = metrics

    for label, value in zip(("flow_mae", "flow_rmse", "occ_mae", "occ_rmse"),
                            metrics):
        print(label, value)
    return f_mae, f_rmse, o_mae, o_rmse

Validate¶

In [15]:
vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
    apply_to_vali_test(trained_model, vali, f_o_mean_std)
flow_mae 42.15036067679154
flow_rmse 71.01068767218318
occ_mae 0.034626120830231935
occ_rmse 0.0692719614612664

Test¶

In [16]:
test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
    apply_to_vali_test(trained_model, test, f_o_mean_std)
flow_mae 40.792357901845904
flow_rmse 68.31812532539399
occ_mae 0.030586869657684364
occ_rmse 0.0622185935880154
In [ ]:
 
In [ ]:
 
In [ ]: